% scribe: Daisy Yan Huang & Jing Lei
% lastupdate: 05 December 2005
% lecture: 25
% references: Durrett, Section 4.6
% title: Reversed Martingales
% keywords: backwards martingales, reversed martingales, exchangeable sequences, Hewitt-Savage 0-1 law, de Finetti's Theorem
% end
\documentclass[12pt, letterpaper]{article}
\include{macros}
\begin{document}
\lecture{25}
{Reversed Martingales}
{Daisy Huang, Jing Lei}{yanhuang@stat,jinglei@stat}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
References: \cite{durrett}, Section 4.6.

\section{Exchangeable $\sigma$-field}
% keywords: exchangeable sequences, exchangeable sigma fields
% end

Let $X_1, X_2,\ldots$ be a sequence of real-valued random variables. Define
$$
\mathcal{E}_n=\sigma\bigl(f(X_1, X_2,\ldots) : f \mbox{ product Borel measurable and symmetric in the first $n$ variables}\bigr),
$$
where ``symmetric in the first $n$ variables'' means that $f(X_1, X_2,\ldots)=f(X_{\pi(1)}, X_{\pi(2)},\ldots)$ for every permutation $\pi:\{1,2,\ldots,n\} \rightarrow \{1,2,\ldots,n\}$, extended by $\pi(k)=k$ for $k>n$. The $\sigma$-field $\mathcal{E}_{\infty}=\bigcap_n\mathcal{E}_n$ is called the \emph{exchangeable $\sigma$-field}.

\textbf{Remark}:
\begin{enumerate}
\item $X$ is $\mathcal{E}_n$-measurable iff $X=f(X_1, X_2,\ldots)$ for some such $f$.
\item $\mathcal{E}_n \supseteq \mathcal{E}_{n+1}$ because $\mathcal{E}_{n+1}$ requires more symmetries, so $\mathcal{E}_n \downarrow \mathcal{E}_{\infty}$. Moreover, the exchangeable $\sigma$-field contains the tail $\sigma$-field:
\begin{eqnarray}
\label{exch-sigma-field}
\mathcal{E}_{\infty} &\supseteq & \mbox{tail $\sigma$-field of } \{X_1, X_2,\ldots\}\\
& = & \bigcap_n \sigma(X_n, X_{n+1},\ldots)
\end{eqnarray}
(note that $\sigma(X_{n+1}, X_{n+2},\ldots)\subseteq \mathcal{E}_n$ for every $n$; shifting the index and intersecting over $n$ gives the inclusion).
\end{enumerate}

The ``basic'' symmetric functions of $X_1,X_2,\ldots,X_n$ are the order statistics
$$
\min_{1\leq i \leq n}X_i= X_{n,1}\leq X_{n,2} \leq \ldots\leq X_{n,n}=\max_{1\leq i \leq n}X_i.
$$
Each $X_{n,k}$ is a symmetric function of $X_1,X_2,\ldots,X_n$, and you can check that
$\mathcal{E}_n = \sigma(X_{n,1},X_{n,2},\ldots,X_{n,n},X_{n+1},X_{n+2},\ldots)$.

\begin{example}
$S_n=X_1+X_2+\cdots+X_n$ is $\mathcal{E}_n$-measurable but, in general, not measurable with respect to the tail $\sigma$-field of $\{X_1, X_2,\ldots\}$.
\end{example}

\section{Hewitt-Savage $0-1$ Law}

\begin{theorem}[Hewitt-Savage $0-1$ Law]
If $X_1, X_2,\ldots$ are i.i.d., then every event in $\mathcal{E}_\infty$ has probability $0$ or $1$.
\end{theorem}

(Compare this with Kolmogorov's $0-1$ Law: since the exchangeable $\sigma$-field contains the tail $\sigma$-field, the Hewitt-Savage law is a strengthening.)

Recall that if $X_1, X_2,\ldots$ are exchangeable, i.e.,
%
$$(X_1,X_2,\ldots,X_n) \overset{d}= (X_{\pi(1)}, X_{\pi(2)},\ldots,X_{\pi(n)})$$
%
for all $n$ and all permutations $\pi$ on $n$ elements (in particular, if they are i.i.d.), and $\E{|X_1|}<\infty$, then $(S_n/n, \mathcal{E}_n)_{n\geq 1}$ is a reversed martingale. Once we know that $S_n/n=\E{(X_1|\mathcal{E}_n)}$, this is immediate because $\mathcal{E}_n \downarrow$. To see the identity: exchangeability implies that
$$
\E{(X_1|\mathcal{E}_n)} =\E{(X_k|\mathcal{E}_n)}\qquad \mbox{for every $1\leq k \leq n$}.
$$
This is because
\begin{eqnarray*}
&& (X_1,f(X_1,X_2,\cdots,X_n,X_{n+1},X_{n+2}, \cdots)) \\
&& \overset{d}= \, (X_k,f(X_k,X_2,\cdots,X_{k-1},X_1,X_{k+1}, \cdots ,X_n,X_{n+1},X_{n+2},\cdots))\\
&& = \,(X_k, f(X_1,X_2,\ldots,X_n,X_{n+1},X_{n+2},\ldots)),
\end{eqnarray*}
where the first step swaps $X_1$ and $X_k$ using exchangeability and the second uses the symmetry of $f$ in its first $n$ arguments. Summing the claim over $k$ gives
$$
n \E{(X_1|\mathcal{E}_n)}=\E{(S_n|\mathcal{E}_n)}=S_n, \qquad \mbox{since $S_n$ is $\mathcal{E}_n$-measurable.}
$$
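For completeness (this check is not written out above), the reversed martingale property follows from this identity by the tower property, since $\mathcal{E}_{n+1}\subseteq\mathcal{E}_n$:
$$
\E\!\left(\frac{S_n}{n}\,\Big|\,\mathcal{E}_{n+1}\right)
= \E\bigl(\E{(X_1|\mathcal{E}_n)}\,\big|\,\mathcal{E}_{n+1}\bigr)
= \E{(X_1|\mathcal{E}_{n+1})}
= \frac{S_{n+1}}{n+1},
$$
which is exactly the defining property of a reversed martingale with respect to the decreasing family $(\mathcal{E}_n)_{n\geq 1}$.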
Now we prove the theorem. The plan is to show that, for every event $F\in \sigma(X_1,\ldots,X_{n})$,
$$
\P(F|\mathcal{E}_\infty)=\P(F),
$$
i.e.\ that $\sigma(X_1,\ldots,X_{n})$ is independent of $\mathcal{E}_\infty$. Letting $n \rightarrow \infty$ then shows that $\sigma(X_1,X_2,\ldots)$ is independent of $\mathcal{E}_\infty$; since $\mathcal{E}_\infty \subseteq \sigma(X_1,X_2,\ldots)$, this makes $\mathcal{E}_\infty$ independent of itself, which yields the $0-1$ Law.

\begin{proof}
By the convergence theorem for reversed martingales,
$$
\P(F|\mathcal{E}_\infty)=\lim_{n\rightarrow\infty}\P(F|\mathcal{E}_n) \qquad \mbox{a.s.}
$$
First let $F=\{ X_1 \in \hat{F} \}$ for some Borel set $\hat{F}\subseteq \R$. Then,
\begin{eqnarray*}
\P(F \mid \mathcal{E}_n) &=& \E(\1_{\hat{F}}(X_1)|\mathcal{E}_n)\\
&=& \E(\1_{\hat{F}}(X_k)|\mathcal{E}_n) \qquad \mbox{for every $1\leq k\leq n$, by exchangeability as above}\\
&=& \E\!\left(\frac{1}{n}\sum_{k=1}^{n} \1_{\hat{F}}(X_k) \,\Big|\, \mathcal{E}_n\right)\\
&=& \frac{1}{n}\sum_{k=1}^{n} \1_{\hat{F}}(X_k)\\
&\ascv & \E{(\1_{\hat{F}}(X_1))} \qquad \mbox{by the strong law of large numbers}\\
&=& \P(X_1\in \hat{F})\\
&=& \P(F).
\end{eqnarray*}
This proves the claim for $F \in \sigma(X_1)$. To deal with the case $F \in \sigma(X_1, X_2)$, we do the same thing. Let $F=\{ (X_1, X_2) \in \hat{F} \}$ for some Borel set $\hat{F}\subseteq \R^2$ and write $\varphi(X_1,X_2)=\1((X_1,X_2)\in\hat{F})$. Then,
%
\begin{eqnarray*}
\P(F \mid \mathcal{E}_n) &=& \E(\varphi{(X_1,X_2)}|\mathcal{E}_n)\\
&=& \E(\varphi{(X_i,X_j)}|\mathcal{E}_n) \qquad \mbox{for all $1\leq i \neq j\leq n$}\\
&=& \E\!\left({1\over {n(n-1)}}\sum_{1\leq i \neq j\leq n} \varphi{(X_i,X_j)} \,\Big|\, \mathcal{E}_n\right)\\
&=& {1\over {n(n-1)}}\sum_{1\leq i \neq j\leq n} \varphi{(X_i,X_j)}.
\end{eqnarray*}
Consider first $\varphi{(X_i,X_j)}=f(X_i)g(X_j)$, i.e.\ $\hat{F}$ is a rectangle; then
%
\begin{eqnarray*}
\P(F \mid \mathcal{E}_n) &=& \frac{1}{n(n-1)}\sum_{1\leq i \neq j\leq n} f(X_i) g(X_j)\\
&=& \frac{1}{n(n-1)}\left(\sum_{1\leq i,j \leq n} f(X_i) g(X_j) - \sum_{i=1}^{n} f(X_i) g(X_i) \right)\\
&=& \frac{1}{n(n-1)}\left(\sum_{i=1}^{n}f(X_i) \sum_{j=1}^{n} g(X_j)-\sum_{i=1}^{n} f(X_i) g(X_i) \right)\\
&\ascv & \E f(X_1) \E g(X_2)\\
&=& \E \varphi(X_1,X_2)\\
&=& \P(F),
\end{eqnarray*}
where the convergence uses the strong law of large numbers for each factor and the fact that the diagonal sum, divided by $n(n-1)$, vanishes in the limit. To finish, for general $\hat{F}\in \borel(\R ^2)$ we just use the $\pi$-$\lambda$ theorem, and the case $\hat{F}\in \borel(\R ^k)$, $k\geq 3$, is similar. So far, we have shown that
$$
\P(F \mid \mathcal{E}_{\infty})=\P(F) \qquad \mbox{for all } F\in \sigma(X_1,\ldots,X_n),
$$
i.e.\ $\mathcal{E}_{\infty}$ is independent of $\sigma(X_1,\ldots,X_n)$. As in the proof of Kolmogorov's 0-1 Law, sending $n\rightarrow \infty$ shows that $\mathcal{E}_{\infty}$ is independent of $\sigma(X_1,X_2,\ldots)$. Since $\mathcal{E}_{\infty}\subseteq \sigma(X_1,X_2,\ldots)$, it follows that $\mathcal{E}_{\infty}$ is independent of itself: every $A\in\mathcal{E}_{\infty}$ satisfies $\P(A)=\P(A\cap A)=\P(A)^2$, so $\P(A)\in\{0,1\}$. This completes the proof.
\end{proof}

\section{de Finetti's Theorem}

\begin{theorem}
If $X_1,X_2,\ldots$ are exchangeable and $F_n(x):=\frac{1}{n}\sum_{1\leq i\leq n}\1(X_i\leq x)$, then, almost surely,
%
$$\lim_{n\toinf}\sup_x|F_n(x)-F(x)|=0$$
for some random CDF $(F(x),\, x\in \R)$. Moreover, given $\mathcal{E}_{\infty}$, the variables $X_1,X_2,\ldots$ are i.i.d.\ with common distribution $F$; i.e.,
\begin{equation}\label{exch_indpt}
\P(X_1\leq x_1,X_2\leq x_2,\ldots,X_k\leq x_k|\mathcal{E}_{\infty})=F(x_1)F(x_2)\cdots F(x_k).
\end{equation}
\end{theorem}

\textbf{Note:} Conceptually, it is as if $F$ were first picked at random, in some way, from the set of probability CDF's on $\R$, and then $X_1,X_2,\ldots$ were sampled i.i.d.\ from $F$.
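As an illustration of this Note, here is a standard example (not from the lecture itself, added for concreteness) in the simplest case of $\{0,1\}$-valued variables; the random ``success probability'' $\Theta$ is notation introduced just for this illustration.

\begin{example}
Let $\Theta$ take values in $[0,1]$ and, given $\Theta=\theta$, let $X_1,X_2,\ldots$ be i.i.d.\ Bernoulli($\theta$). For any $x_1,\ldots,x_n\in\{0,1\}$ with $k=\sum_{i=1}^n x_i$,
$$
\P(X_1=x_1,\ldots,X_n=x_n)=\E\bigl(\Theta^{k}(1-\Theta)^{n-k}\bigr),
$$
which depends on $(x_1,\ldots,x_n)$ only through $k$, so the sequence is exchangeable. The random CDF $F$ in the theorem is the Bernoulli($\Theta$) CDF, and
$$
\sup_x|F_n(x)-F(x)|=\Bigl|\frac{1}{n}\sum_{i=1}^{n}X_i-\Theta\Bigr| \ascv 0
$$
by the strong law of large numbers applied conditionally on $\Theta$. Conversely, de Finetti's theorem implies that every exchangeable $\{0,1\}$-valued sequence arises as such a mixture of i.i.d.\ Bernoulli sequences.
\end{example}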
\begin{proofsketch}
(This proof sketch is incomplete; see \cite{durrett} for the details.)
\begin{enumerate}
\item Look at \eqref{exch_indpt} for $k=1$. By the MGCT for reversed martingales and exchangeability (averaging over the first $n$ variables exactly as in the proof of the Hewitt-Savage $0-1$ Law),
%
\begin{eqnarray*}
F(x)&=&\P(X_1\leq x|\mathcal{E}_{\infty})\\
&=&\lim_{n\toinf}\P(X_1\leq x|\mathcal{E}_n) \mbox{ a.s.}\\
&=&\lim_{n\toinf}F_n(x) \mbox{ a.s.}
\end{eqnarray*}
\item Since $F_n(x)$ is non-decreasing in $x$ for each fixed $n$, it follows that $F(x)$ is non-decreasing in $x$ almost surely.
\item Clean up over the rationals: let
$$F^{\star}(x):=\lim_{q\downarrow x}F(q), \qquad q\in \Q,$$
where $\Q$ is the set of rational numbers. Then argue that $F^{\star}$ is a (random) CDF -- it is non-decreasing and right-continuous by construction -- and replace $F$ by $F^{\star}$.
\end{enumerate}
\end{proofsketch}

\bibliographystyle{plain}
\bibliography{../books.bib}

\end{document}